import pandas as pd
import yfinance as yf
import datetime
from datetime import date, timedelta
# Build the download window: today back 5000 days (~13.7 years),
# formatted as ISO date strings for yfinance.
today = date.today()
end_date = today.strftime("%Y-%m-%d")
start_date = (today - timedelta(days=5000)).strftime("%Y-%m-%d")

# Download daily NFLX OHLCV data.
# auto_adjust=False keeps the raw "Adj Close" column — newer yfinance
# releases default to auto_adjust=True, which drops that column and
# would break the column selection below.
data = yf.download('NFLX',
                   start=start_date,
                   end=end_date,
                   progress=False,
                   auto_adjust=False)

# Promote the DatetimeIndex to an ordinary "Date" column and fix the
# column order, then renumber the rows 0..n-1.
data["Date"] = data.index
data = data[["Date", "Open", "High", "Low", "Close", "Adj Close", "Volume"]]
data.reset_index(drop=True, inplace=True)
print(data.tail())
# Output of print(data.tail()):
#            Date        Open        High         Low       Close   Adj Close \
# 3443 2023-11-06  434.380005  435.029999  429.609985  434.739990  434.739990
# 3444 2023-11-07  436.179993  437.640015  431.000000  434.609985  434.609985
# 3445 2023-11-08  435.000000  438.070007  433.679993  436.649994  436.649994
# 3446 2023-11-09  438.029999  440.380005  434.350006  435.149994  435.149994
# 3447 2023-11-10  437.480011  447.480011  435.510010  447.239990  447.239990
#       Volume
# 3443 3003200
# 3444 3291100
# 3445 2356800
# 3446 2735500
# 3447 4440600
import plotly.graph_objects as go

# Candlestick chart of the full OHLC history; the range slider is
# hidden to leave more room for the chart itself.
candles = go.Candlestick(x=data["Date"],
                         open=data["Open"],
                         high=data["High"],
                         low=data["Low"],
                         close=data["Close"])
figure = go.Figure(data=[candles])
figure.update_layout(title="Netflix Stock Price Analysis",
                     xaxis_rangeslider_visible=False)
figure.show()
# Pairwise correlation of the numeric columns, ranked against Close.
# numeric_only=True excludes the "Date" column — pandas >= 2.0 raises a
# TypeError on non-numeric columns without it.
correlation = data.corr(numeric_only=True)
print(correlation["Close"].sort_values(ascending=False))
# Output: Close 1.000000  Adj Close 1.000000  High 0.999792  Low 0.999778  Open 0.999528  Volume -0.490298  Name: Close, dtype: float64
# Predictors (x) and regression target (y) as NumPy arrays; the target
# is reshaped to a column vector for Keras.
feature_columns = ["Open", "High", "Low", "Volume"]
x = data[feature_columns].to_numpy()
y = data["Close"].to_numpy().reshape(-1, 1)

from sklearn.model_selection import train_test_split

# Hold out 20% of the rows for evaluation; fixed seed so the split is
# reproducible between runs.
xtrain, xtest, ytrain, ytest = train_test_split(
    x, y, test_size=0.2, random_state=42)
from keras.models import Sequential
from keras.layers import Dense, LSTM

# Stacked-LSTM regressor: each row's 4 features are treated as a
# length-4 sequence of scalars, followed by two dense layers that
# collapse to a single predicted Close price.
model = Sequential()
layer_stack = [
    LSTM(128, return_sequences=True, input_shape=(xtrain.shape[1], 1)),
    LSTM(64, return_sequences=False),
    Dense(25),
    Dense(1),
]
for layer in layer_stack:
    model.add(layer)
model.summary()
# Output of model.summary():
# Model: "sequential"
# _________________________________________________________________
#  Layer (type)                Output Shape              Param #
# =================================================================
#  lstm (LSTM)                 (None, 4, 128)            66560
#  lstm_1 (LSTM)               (None, 64)                49408
#  dense (Dense)               (None, 25)                1625
#  dense_1 (Dense)             (None, 1)                 26
# =================================================================
# Total params: 117619 (459.45 KB)
# Trainable params: 117619 (459.45 KB)
# Non-trainable params: 0 (0.00 Byte)
# _________________________________________________________________
# Train with Adam on mean-squared error.  batch_size=1 performs one
# weight update per sample — slow, but kept to match the original run.
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(xtrain, ytrain, epochs=30, batch_size=1)
# Output (training log):
# Epoch 1/30 2758/2758 [==============================] - 20s 6ms/step - loss: 6857.9448 Epoch 2/30 2758/2758 [==============================] - 14s 5ms/step - loss: 564.1664 Epoch 3/30 2758/2758 [==============================] - 15s 5ms/step - loss: 354.1405 Epoch 4/30 2758/2758 [==============================] - 12s 4ms/step - loss: 315.8818 Epoch 5/30 2758/2758 [==============================] - 12s 4ms/step - loss: 297.4737 Epoch 6/30 2758/2758 [==============================] - 12s 4ms/step - loss: 344.0251 Epoch 7/30 2758/2758 [==============================] - 13s 5ms/step - loss: 260.8701 Epoch 8/30 2758/2758 [==============================] - 16s 6ms/step - loss: 298.8172 Epoch 9/30 2758/2758 [==============================] - 12s 4ms/step - loss: 254.7805 Epoch 10/30 2758/2758 [==============================] - 12s 4ms/step - loss: 211.7159 Epoch 11/30 2758/2758 [==============================] - 12s 4ms/step - loss: 257.2849 Epoch 12/30 2758/2758 [==============================] - 12s 4ms/step - loss: 201.3425 Epoch 13/30 2758/2758 [==============================] - 12s 4ms/step - loss: 408.3354 Epoch 14/30 2758/2758 [==============================] - 11s 4ms/step - loss: 293.3828 Epoch 15/30 2758/2758 [==============================] - 12s 4ms/step - loss: 209.1372 Epoch 16/30 2758/2758 [==============================] - 12s 4ms/step - loss: 165.1132 Epoch 17/30 2758/2758 [==============================] - 13s 5ms/step - loss: 122.7720 Epoch 18/30 2758/2758 [==============================] - 16s 6ms/step - loss: 222.4572 Epoch 19/30 2758/2758 [==============================] - 14s 5ms/step - loss: 210.3791 Epoch 20/30 2758/2758 [==============================] - 15s 5ms/step - loss: 214.5449 Epoch 21/30 2758/2758 [==============================] - 15s 6ms/step - loss: 180.4480 Epoch 22/30 2758/2758 [==============================] - 15s 5ms/step - loss: 207.5143 Epoch 23/30 2758/2758 [==============================] - 14s 5ms/step - loss: 187.5416 Epoch
# 24/30 2758/2758 [==============================] - 15s 5ms/step - loss: 189.6663 Epoch 25/30 2758/2758 [==============================] - 16s 6ms/step - loss: 302.6812 Epoch 26/30 2758/2758 [==============================] - 13s 5ms/step - loss: 334.1971 Epoch 27/30 2758/2758 [==============================] - 15s 5ms/step - loss: 207.9412 Epoch 28/30 2758/2758 [==============================] - 14s 5ms/step - loss: 183.3840 Epoch 29/30 2758/2758 [==============================] - 13s 5ms/step - loss: 173.9512 Epoch 30/30 2758/2758 [==============================] - 20s 7ms/step - loss: 180.8767
# <keras.src.callbacks.History at 0x2827564f880>
import numpy as np

# Predict the Close price for one new observation.  The feature order
# must match the training matrix exactly:
#   features = [Open, High, Low, Volume]
# (The original comment incorrectly listed "Adj Close" — the model was
# trained on four features only, and Adj Close was never among them.)
features = np.array([[401.970001, 427.700012, 398.200012, 20047500]])
model.predict(features)
# Output:
# 1/1 [==============================] - 2s 2s/step
# array([[404.91757]], dtype=float32)